# Connecting to Google drive
from google.colab import drive
drive.mount('/content/drive')
pip install ts2vg
!pip install igraph
import pandas as pd
from glob import glob
import os
from ts2vg import NaturalVG
import numpy as np
from ts2vg import HorizontalVG
# Mount your Google Drive
drive.mount('/gdrive')
# A code block to loop through the folder structure and generate HVG and NVG
# graph parameters (node count, link count, mean degree, diameter, average
# path length) for every subject / activity / sensor area / axis combination.
# Produces `result`: result[0] is the HVG table, result[1] the NVG table.
result = []
for method in ['HVG', 'NVG']:
    nodes = []
    links = []
    degree = []
    dia = []
    lenn = []
    activity = []
    area = []
    sub = []
    atr = []
    for i in range(1, 16):
        p = "/gdrive/MyDrive/Project_2_IE5374/Subject" + " " + str(i)
        for file_name in os.listdir(p):
            # BUG FIX: the original prefixed an extra "/" onto `p`, which
            # already starts with "/" (yielding "//gdrive/..."); join instead.
            f = os.path.join(p, file_name)
            for file_name_two in os.listdir(f):
                # Skip the documentation file shipped with each recording.
                if 'readMe' in file_name_two:
                    continue
                # File names look like <prefix>_<activity>_..._<area>.csv;
                # when a '2' token is present the area token sits one slot later.
                t = file_name_two.split('_')
                row = 3 if '2' in t else 2
                df = pd.read_csv(os.path.join(f, file_name_two))
                for j in ['x', 'y', 'z']:
                    attr = j
                    col = 'attr_' + attr
                    g = NaturalVG() if method == 'NVG' else HorizontalVG()
                    # Fixed 1024-sample window of the signal.
                    g.build(df[col].iloc[1000:2024])
                    ig_g = g.as_igraph()
                    nodes.append(ig_g.vcount())
                    links.append(ig_g.ecount())
                    degree.append(np.mean(ig_g.degree()))
                    dia.append(ig_g.diameter())
                    lenn.append(ig_g.average_path_length())
                    activity.append(t[1])
                    area.append((t[row]).split('.')[0])
                    sub.append("Subject " + str(i))
                    atr.append(attr)
    df = pd.DataFrame()
    df['Subject'] = sub
    df['Activity'] = activity
    df['Attribute'] = atr
    df['Sensor_Area'] = area
    df['Nodes'] = nodes
    df['Number_of_Links'] = links
    df['Average_Degree'] = degree
    df['Network_Diameter'] = dia
    df['Average_Path_Length'] = lenn
    df['Method'] = method
    result.append(df)
# HVG data table (first five rows) — result[0] was built with method 'HVG'.
result[0].head(5)
# NVG data table (first five rows) — result[1] was built with method 'NVG'.
result[1].head(5)
# Function to plot Scatter plots
import seaborn as sns
import matplotlib.pyplot as plt
def Scatter_Plot_TaskOne(activity, dataframe):
    """Scatter-plot Average_Degree vs Network_Diameter, hue-coded by activity.

    One plot is produced per (sensor area, attribute) pair present in the data.

    Args:
        activity: list of activity labels to include (e.g. ['walking', 'running']).
        dataframe: one of the HVG/NVG tables built above (needs the columns
            Activity, Sensor_Area, Attribute, Average_Degree, Network_Diameter,
            Method).
    """
    df_temp = dataframe.loc[dataframe['Activity'].isin(activity), :]
    for q in df_temp['Sensor_Area'].unique():
        df_q = df_temp.loc[df_temp['Sensor_Area'] == q]
        for j in ['x', 'y', 'z']:
            # BUG FIX: the original indexed df_q with a boolean mask built
            # from df_temp (cross-frame indexing); build the mask on df_q.
            df_p = df_q.loc[df_q['Attribute'] == j]
            ax_dim = sns.scatterplot(data= df_p, x = "Average_Degree", y = "Network_Diameter", hue = "Activity")
            ax_dim.set_title('Attribute:'+j+' Method:'+(df_p['Method'].unique())[0]+' Sensor:'+q)
            plt.show()
# Scatter plot for HVG walking and running for attributes x, y and z
Scatter_Plot_TaskOne(['walking','running'], result[0])
# Scatter plot for HVG climbing up and climbing down for attributes x, y and z
Scatter_Plot_TaskOne(['climbingup','climbingdown'], result[0])
# Scatter plot for NVG walking and running for attributes x, y and z
Scatter_Plot_TaskOne(['walking','running'], result[1])
# Scatter plot for NVG climbing up and climbing down for attributes x, y and z
Scatter_Plot_TaskOne(['climbingup','climbingdown'], result[1])
''' This module has essential functions supporting
fast and effective computation of permutation entropy and
its different variations.'''
import numpy as np
def s_entropy(freq_list):
    '''Compute the Shannon entropy of a given frequency distribution.
    USAGE: s_entropy(freq_list)
    ARGS: freq_list = Numeric vector representing the frequency distribution
    OUTPUT: A numeric value representing Shannon's entropy'''
    # Zero frequencies contribute nothing (lim p->0 of p*log p = 0), so they
    # are filtered out before taking logarithms; the remaining terms are
    # accumulated left-to-right and negated, i.e. H = -sum(f * ln f).
    return -sum((freq * np.log(freq) for freq in freq_list if freq != 0), 0.0)
def ordinal_patterns(ts, embdim, embdelay):
    '''Compute the ordinal-pattern frequencies of a time series.
    USAGE: ordinal_patterns(ts, embdim, embdelay)
    ARGS: ts = numeric vector representing the time series,
          embdim = embedding dimension (3 <= embdim <= 7 preferred range),
          embdelay = embedding delay
    OUTPUT: list of the non-zero frequencies of the ordinal patterns'''
    # BUG FIX: `np.math.factorial` relied on an accidental, now-removed
    # NumPy alias (gone in NumPy 2.0); use the stdlib directly.
    from math import factorial

    def _rank(perm):
        # Lexicographic (Lehmer) rank of each permutation row, computed
        # recursively: (# entries smaller than the leading entry) * (n-1)!
        # plus the rank of the remaining columns. Same scheme as module-level
        # _hash, inlined so this function is self-contained.
        n_rows, n_cols = perm.shape
        if n_cols == 1:
            return np.zeros(n_rows)
        smaller = np.sum(np.apply_along_axis(lambda y: y < perm[:, 0], 0, perm), axis=1)
        return smaller * factorial(n_cols - 1) + _rank(perm[:, 1:])

    m, t = embdim, embdelay
    x = ts if isinstance(ts, np.ndarray) else np.array(ts)
    # Delay-embed: column i is the series rolled by i*t, so row k holds
    # [x[k], x[k-t], ..., x[k-(m-1)t]].
    tmp = np.zeros((x.shape[0], m))
    for i in range(m):
        tmp[:, i] = np.roll(x, i * t)
    # Drop the leading rows contaminated by np.roll's wrap-around.
    partition = tmp[(t * m - 1):, :]
    permutation = np.argsort(partition)
    idx = _rank(permutation)
    counts = np.zeros(factorial(m))
    for i in range(counts.shape[0]):
        counts[i] = (idx == i).sum()
    return list(counts[counts != 0].astype(int))
def _hash(x):
m, n = x.shape
if n == 1:
return np.zeros(m)
return np.sum(np.apply_along_axis(lambda y: y < x[:, 0], 0, x), axis=1) * np.math.factorial(n-1) + _hash(x[:, 1:])
def p_entropy(op):
    '''Normalized permutation entropy of an ordinal-pattern frequency vector.
    ARGS: op = vector of ordinal-pattern frequencies
    OUTPUT: Shannon entropy of the pattern distribution divided by its
            maximum possible value, log(len(op)) — a value in [0, 1].'''
    max_entropy = np.log(len(op))
    probs = np.divide(np.array(op), float(sum(op)))
    # Inlined Shannon entropy: skip zero probabilities, accumulate -p*ln p
    # term by term (same order of operations as s_entropy).
    shannon = 0.0
    for prob in probs:
        if prob != 0:
            shannon -= prob * np.log(prob)
    return shannon / max_entropy
def complexity(op):
    '''Statistical complexity of a time series: Comp_JS = Q_o * JSdivergence * pe
    Q_o = normalizing constant
    JSdivergence = Jensen-Shannon divergence between the pattern
                   distribution and the uniform distribution
    pe = normalized permutation entropy
    ARGS: op = ordinal-pattern frequency vector'''
    n = len(op)
    pe = p_entropy(op)
    # Normalizing constant Q_o: built from the entropy of the half-uniform
    # mixture; (1 - 0.5)/n is the uniform half-weight per pattern.
    half_uniform = (1 - 0.5) / n
    constant1 = (0.5 + half_uniform) * np.log(0.5 + half_uniform)
    constant2 = half_uniform * np.log(half_uniform) * (n - 1)
    constant3 = 0.5 * np.log(n)
    Q_o = -1 / (constant1 + constant2 + constant3)
    # Jensen-Shannon divergence between the observed pattern distribution
    # and the uniform distribution over the n patterns.
    pattern_probs = np.divide(op, sum(op))
    mixture = (0.5 * pattern_probs) + (0.5 * (1 / n))
    JSdivergence = (s_entropy(mixture) - 0.5 * s_entropy(pattern_probs) - 0.5 * np.log(n))
    return Q_o * JSdivergence * pe
def weighted_ordinal_patterns(ts, embdim, embdelay):
    '''Variance-weighted ordinal-pattern frequencies of a time series.
    Each embedded window contributes its sample variance (instead of 1) to
    the tally of its ordinal pattern.
    ARGS: ts = numeric vector, embdim = embedding dimension,
          embdelay = embedding delay
    OUTPUT: list of the non-zero weighted frequencies'''
    # BUG FIX: `np.math.factorial` relied on an accidental, now-removed
    # NumPy alias (gone in NumPy 2.0); use the stdlib directly.
    from math import factorial

    def _rank(perm):
        # Lehmer rank per permutation row — same scheme as module-level
        # _hash, inlined so this function is self-contained.
        n_rows, n_cols = perm.shape
        if n_cols == 1:
            return np.zeros(n_rows)
        smaller = np.sum(np.apply_along_axis(lambda y: y < perm[:, 0], 0, perm), axis=1)
        return smaller * factorial(n_cols - 1) + _rank(perm[:, 1:])

    m, t = embdim, embdelay
    x = ts if isinstance(ts, np.ndarray) else np.array(ts)
    tmp = np.zeros((x.shape[0], m))
    for i in range(m):
        tmp[:, i] = np.roll(x, i * t)
    partition = tmp[(t * m - 1):, :]
    xm = np.mean(partition, axis=1)
    # Per-window variance, used as that window's pattern weight.
    weight = np.mean((partition - xm[:, None]) ** 2, axis=1)
    idx = _rank(np.argsort(partition))
    counts = np.zeros(factorial(m))
    for i in range(counts.shape[0]):
        counts[i] = sum(weight[i == idx])
    return list(counts[counts != 0])
# A code block to loop through the folder structure and generate permutation
# entropy and statistical complexity for every combination of subject,
# activity, axis, signal length, embedding delay and embedding dimension.
# Produces df_task_two, one row per combination.
embedded_dimensions = [3, 4, 5, 6]
embedded_dela = [1, 2, 3]
signal_len = [1024, 2048, 4096]
permutation_entropy_a = []
complexity_a = []
activity = []
area = []
sub = []
atr = []
len_a = []
delay_a = []
dim_a = []
for i in range(1, 16):
    p = "/gdrive/MyDrive/Project_2_IE5374/Subject" + " " + str(i)
    for file_name in os.listdir(p):
        # BUG FIX: the original prefixed an extra "/" onto `p`, which already
        # starts with "/" (yielding "//gdrive/..."); join instead. A stray
        # no-op `else: continue` after the inner loop was also removed.
        f = os.path.join(p, file_name)
        for file_name_two in os.listdir(f):
            # Skip the documentation file shipped with each recording.
            if 'readMe' in file_name_two:
                continue
            # File names look like <prefix>_<activity>_..._<area>.csv; when a
            # '2' token is present the area token sits one slot later.
            t = file_name_two.split('_')
            row = 3 if '2' in t else 2
            df = pd.read_csv(os.path.join(f, file_name_two))
            for j in ['x', 'y', 'z']:
                attr = j
                col = 'attr_' + attr
                for lenn in signal_len:
                    for delay in embedded_dela:
                        for dim in embedded_dimensions:
                            # Windows all start at sample 1000 and span `lenn` samples.
                            op = ordinal_patterns(df[col].iloc[1000:(1000 + lenn)], int(dim), int(delay))
                            permutation_entropy_a.append(p_entropy(op))
                            complexity_a.append(complexity(op))
                            activity.append(t[1])
                            area.append((t[row]).split('.')[0])
                            sub.append("Subject " + str(i))
                            atr.append(attr)
                            len_a.append(lenn)
                            delay_a.append(delay)
                            dim_a.append(dim)
df_task_two = pd.DataFrame()
df_task_two['Activity'] = activity
df_task_two['Attribute'] = atr
df_task_two['Sensor_Area'] = area
df_task_two['Subject'] = sub
df_task_two['Signal_Length'] = len_a
df_task_two['Embedded_Delay'] = delay_a
df_task_two['Embedded_Dimensions'] = dim_a
df_task_two['Permutation_Entropy'] = permutation_entropy_a
df_task_two['Complexity'] = complexity_a
df_task_two.head(5)
import seaborn as sns
import matplotlib.pyplot as plt
def Scatter_Plot(signal_length, delay, dimensions, activity, dataframe):
    """Scatter-plot Permutation_Entropy vs Complexity, hue-coded by activity.

    One plot per (attribute, sensor area, embedding dimension) at the given
    signal length and embedding delay.

    Args:
        signal_length: Signal_Length value to select (e.g. 4096).
        delay: Embedded_Delay value to select.
        dimensions: list of Embedded_Dimensions values, one plot each.
        activity: list of activity labels to include.
        dataframe: the df_task_two table built above.
    """
    df_filtered = dataframe.loc[dataframe['Activity'].isin(activity), :]
    for j in ['x', 'y', 'z']:
        df_j = df_filtered.loc[df_filtered['Attribute'] == j]
        for q in df_j['Sensor_Area'].unique():
            # BUG FIX: the original filtered df_filtered here, silently
            # discarding the attribute filter — every "Attribute:<j>" plot
            # actually mixed all three axes. Filter df_j instead.
            df_t = df_j.loc[df_j['Sensor_Area'] == q]
            for dim in dimensions:
                ax_dim = sns.scatterplot(data= df_t[(df_t['Signal_Length'] == signal_length) & (df_t['Embedded_Delay'] == delay) & (df_t['Embedded_Dimensions'] == dim)], x="Permutation_Entropy", y="Complexity", hue="Activity")
                ax_dim.set_title('Attribute:'+j+' Dimension:'+str(dim)+';'+' Signal Length:'+str(signal_length)+';'+' Embedded Delay:'+str(delay)+' Sensor Area:'+q)
                plt.show()
# Entropy-complexity scatter plots at signal length 4096 and delay 1, for all
# embedding dimensions: walking vs running, then climbing up vs climbing down.
Scatter_Plot(4096, 1, [3, 4, 5, 6], ['walking', 'running'], df_task_two)
Scatter_Plot(4096, 1, [3, 4, 5, 6], ['climbingup', 'climbingdown'], df_task_two)
# Export the notebook itself to HTML (Colab shell magic).
!jupyter nbconvert --to html Project_2_IE5374-4.ipynb